Parameters

The following parameters determine the regional coverage of the forecast and the positions for which we plot detailed time series. They also determine the location of (temporary) data files, and details for the parallelization with Dask.

In [1]:
# parameters

# regional coverage (degrees North / degrees East; lon uses the [0, 360) convention)
lat_min, lat_max = -35, 35
lon_min, lon_max = 0, 360

# reach of the reliable ("good") part of the forecast
good_forecast_days = 7  # 7 days

# additional buoy positions to be plotted
# The ones found in the downloaded data will be shown anyway.
# NOTE: longitudes here use [-180, 180) and are converted to [0, 360) below.
added_buoy_positions = [
    {"lat": 20.0, "lon": -38.0},
    {"lat": 15.0, "lon": -38.0},
    {"lat": 21.0, "lon": -23.0},
    {"lat": 12.0, "lon": -23.0},
    {"lat": -6.0, "lon": -10.0},
    {"lat": -10.0 ,"lon": -10.0},
]

# (temporary) data files consumed by this notebook
GFS_zarr_store = "tmp_GFS.zarr"
slab_zarr_store = "tmp_slab.zarr"
buoy_file_name = "tmp_buoy_data"
buoy_positions_file = "tmp_buoy_positions.csv"
mimoc_mld_file = "tmp_mimoc_mld.nc"

# dask specifics (1 worker, 2 threads, 6 GB memory limit)
dask_kwargs = {"n_workers": 1, "threads_per_worker": 2, "memory_limit": 6e9}

Technical Preamble

Before doing any calculations, we'll need to import a few modules. We'll also start a Dask cluster for parallel execution.

In [2]:
# dask
from dask.distributed import Client

# plotting
from bokeh.models.formatters import DatetimeTickFormatter
import cartopy.crs as ccrs
import cmocean
import geoviews as gv
import holoviews as hv
import hvplot.xarray, hvplot.pandas

# numerics
import numpy as np
import pandas as pd
import xarray as xr

# aux
from functools import reduce
from operator import add
In [3]:
# Create a local Dask cluster and connect a client to it; displaying
# `client` renders a summary of workers, cores, and memory.
client = Client(**dask_kwargs)
client
Out[3]:

Client

Cluster

  • Workers: 1
  • Cores: 2
  • Memory: 6.00 GB

Get buoy locations from the buoy data set

In [4]:
# Combine the buoy positions found in the downloaded data with the manually
# configured extra positions, normalising longitudes to the [0, 360)
# convention used by the model grids.
added_df = pd.DataFrame.from_records(added_buoy_positions)
added_df["lon"] = (360.0 + added_df["lon"]) % 360.0
buoy_positions = pd.read_csv(buoy_positions_file).merge(added_df, how="outer")
buoy_positions.head(3)
Out[4]:
lat lon
0 21.0 337.0
1 20.0 322.0
2 15.0 322.0

Load the GFS and slab-model data

In [5]:
# Open the (temporary) data stores; the zarr stores are opened lazily and
# are Dask-backed, the MIMOC MLD climatology is a plain netCDF file.
ds_GFS = xr.open_zarr(GFS_zarr_store)
ds_slab = xr.open_zarr(slab_zarr_store)
ds_mld = xr.open_dataset(mimoc_mld_file)

Find start of forecast period

We'll need the time stamp of the start of the forecasting data.

In [6]:
# The forecast begins right after the last non-forecast ("analysis") step:
# count the non-forecast time steps and take the time stamp at that index
# (clamped to 0 in case the data contains no analysis steps at all).
n_analysis_steps = (~ds_GFS["is_forecast"].astype(bool)).sum().compute().data
start_of_forecast = ds_GFS["time"].data[max(0, n_analysis_steps - 1)]
print(start_of_forecast)
2021-03-09T12:00:00.000000000
In [7]:
# Length of the reliable ("good") part of the forecast as a timedelta.
good_forecast_time = np.timedelta64(good_forecast_days, "D")

Restrict regionally

In [8]:
# Restrict both datasets to the configured region.  The latitude coordinate
# is ordered North-to-South, hence the slice runs from lat_max down to lat_min.
region = {"lat": slice(lat_max, lat_min), "lon": slice(lon_min, lon_max)}
ds_GFS = ds_GFS.sel(**region)
ds_slab = ds_slab.sel(**region)
In [9]:
# Inspect the regionally restricted slab-model dataset.
ds_slab
Out[9]:
Show/Hide data repr Show/Hide attributes
xarray.Dataset
    • lat: 141
    • lon: 720
    • time: 248
    • lat
      (lat)
      float32
      35.0 34.5 34.0 ... -34.5 -35.0
      _CoordinateAxisType :
      Lat
      units :
      degrees_north
      array([ 35. ,  34.5,  34. ,  33.5,  33. ,  32.5,  32. ,  31.5,  31. ,  30.5,
              30. ,  29.5,  29. ,  28.5,  28. ,  27.5,  27. ,  26.5,  26. ,  25.5,
              25. ,  24.5,  24. ,  23.5,  23. ,  22.5,  22. ,  21.5,  21. ,  20.5,
              20. ,  19.5,  19. ,  18.5,  18. ,  17.5,  17. ,  16.5,  16. ,  15.5,
              15. ,  14.5,  14. ,  13.5,  13. ,  12.5,  12. ,  11.5,  11. ,  10.5,
              10. ,   9.5,   9. ,   8.5,   8. ,   7.5,   7. ,   6.5,   6. ,   5.5,
               5. ,   4.5,   4. ,   3.5,   3. ,   2.5,   2. ,   1.5,   1. ,   0.5,
               0. ,  -0.5,  -1. ,  -1.5,  -2. ,  -2.5,  -3. ,  -3.5,  -4. ,  -4.5,
              -5. ,  -5.5,  -6. ,  -6.5,  -7. ,  -7.5,  -8. ,  -8.5,  -9. ,  -9.5,
             -10. , -10.5, -11. , -11.5, -12. , -12.5, -13. , -13.5, -14. , -14.5,
             -15. , -15.5, -16. , -16.5, -17. , -17.5, -18. , -18.5, -19. , -19.5,
             -20. , -20.5, -21. , -21.5, -22. , -22.5, -23. , -23.5, -24. , -24.5,
             -25. , -25.5, -26. , -26.5, -27. , -27.5, -28. , -28.5, -29. , -29.5,
             -30. , -30.5, -31. , -31.5, -32. , -32.5, -33. , -33.5, -34. , -34.5,
             -35. ], dtype=float32)
    • lon
      (lon)
      float32
      0.0 0.5 1.0 ... 358.5 359.0 359.5
      _CoordinateAxisType :
      Lon
      units :
      degrees_east
      array([  0. ,   0.5,   1. , ..., 358.5, 359. , 359.5], dtype=float32)
    • time
      (time)
      datetime64[ns]
      2021-02-08 ... 2021-03-25T17:00:00
      array(['2021-02-08T00:00:00.000000000', '2021-02-08T06:00:00.000000000',
             '2021-02-08T12:00:00.000000000', ..., '2021-03-25T12:00:00.000000000',
             '2021-03-25T15:00:00.000000000', '2021-03-25T17:00:00.000000000'],
            dtype='datetime64[ns]')
    • u_slab
      (lat, lon, time)
      float32
      dask.array<chunksize=(90, 100, 248), meta=np.ndarray>
      Array Chunk
      Bytes 100.71 MB 8.93 MB
      Shape (141, 720, 248) (90, 100, 248)
      Count 49 Tasks 16 Chunks
      Type float32 numpy.ndarray
      248 720 141
    • umag_slab
      (lat, lon, time)
      float32
      dask.array<chunksize=(90, 100, 248), meta=np.ndarray>
      Array Chunk
      Bytes 100.71 MB 8.93 MB
      Shape (141, 720, 248) (90, 100, 248)
      Count 49 Tasks 16 Chunks
      Type float32 numpy.ndarray
      248 720 141
    • v_slab
      (lat, lon, time)
      float32
      dask.array<chunksize=(90, 100, 248), meta=np.ndarray>
      Array Chunk
      Bytes 100.71 MB 8.93 MB
      Shape (141, 720, 248) (90, 100, 248)
      Count 49 Tasks 16 Chunks
      Type float32 numpy.ndarray
      248 720 141
  • slab_model_H :
    1
In [10]:
# Keep only buoys inside the plotted region.  Note that pd.Interval is
# right-closed by default, so positions exactly at lat_min / lon_min drop out.
lat_range = pd.Interval(lat_min, lat_max)
lon_range = pd.Interval(lon_min, lon_max)
in_region = (
    buoy_positions["lat"].apply(lat_range.__contains__)
    & buoy_positions["lon"].apply(lon_range.__contains__)
)
buoy_positions = buoy_positions.where(in_region).dropna()

Prepare MLD data

In [11]:
# Build a monthly time axis (mid-month time stamps) that spans all calendar
# years present in the slab-model data, padded by one year on each side, onto
# which the monthly MIMOC climatology will be mapped.
years_data = list(ds_slab.time.groupby("time.year").groups.keys())
years_min = min(years_data)
years_max = max(years_data)  # BUG FIX: was min(years_data), which truncated
                             # the axis whenever the data spans several years
duration_years_data = years_max - years_min + 1

# pad by one year on each side
years_min -= 1
duration_years_data += 2


def _month_start(month_index):
    """First day of the month `month_index` months after January of years_min."""
    return np.datetime64(
        f"{years_min + month_index // 12:04d}-{(month_index % 12) + 1:02d}-01"
    )


# starts of months 0..N-1 and 1..N, respectively
mld_time_coord_lower = xr.DataArray(
    [_month_start(m) for m in range(0, duration_years_data * 12)],
    dims=("time", )
)
mld_time_coord_upper = xr.DataArray(
    [_month_start(m) for m in range(1, duration_years_data * 12 + 1)],
    dims=("time", )
)
# mid-month time stamps: halfway between consecutive month starts
mld_time_coord = (
    mld_time_coord_lower
    + (mld_time_coord_upper - mld_time_coord_lower) / 2.0
)
display(mld_time_coord)
Show/Hide data repr Show/Hide attributes
xarray.DataArray
  • time: 36
  • 2020-01-16T12:00:00 2020-02-15T12:00:00 ... 2022-12-16T12:00:00
    array(['2020-01-16T12:00:00.000000000', '2020-02-15T12:00:00.000000000',
           '2020-03-16T12:00:00.000000000', '2020-04-16T00:00:00.000000000',
           '2020-05-16T12:00:00.000000000', '2020-06-16T00:00:00.000000000',
           '2020-07-16T12:00:00.000000000', '2020-08-16T12:00:00.000000000',
           '2020-09-16T00:00:00.000000000', '2020-10-16T12:00:00.000000000',
           '2020-11-16T00:00:00.000000000', '2020-12-16T12:00:00.000000000',
           '2021-01-16T12:00:00.000000000', '2021-02-15T00:00:00.000000000',
           '2021-03-16T12:00:00.000000000', '2021-04-16T00:00:00.000000000',
           '2021-05-16T12:00:00.000000000', '2021-06-16T00:00:00.000000000',
           '2021-07-16T12:00:00.000000000', '2021-08-16T12:00:00.000000000',
           '2021-09-16T00:00:00.000000000', '2021-10-16T12:00:00.000000000',
           '2021-11-16T00:00:00.000000000', '2021-12-16T12:00:00.000000000',
           '2022-01-16T12:00:00.000000000', '2022-02-15T00:00:00.000000000',
           '2022-03-16T12:00:00.000000000', '2022-04-16T00:00:00.000000000',
           '2022-05-16T12:00:00.000000000', '2022-06-16T00:00:00.000000000',
           '2022-07-16T12:00:00.000000000', '2022-08-16T12:00:00.000000000',
           '2022-09-16T00:00:00.000000000', '2022-10-16T12:00:00.000000000',
           '2022-11-16T00:00:00.000000000', '2022-12-16T12:00:00.000000000'],
          dtype='datetime64[ns]')
    In [12]:
    # Tile the 12-month MIMOC climatology once per covered year and attach
    # the mid-month datetime coordinate constructed above.
    mld_expand = xr.concat(
        duration_years_data * [ds_mld],
        dim="month",
    ).rename({"month": "time"})
    mld_expand.coords["time"] = mld_time_coord
    mld_expand
    
    Out[12]:
    Show/Hide data repr Show/Hide attributes
    xarray.Dataset
      • lat: 341
      • lon: 720
      • time: 36
      • lat
        (lat)
        float32
        -80.0 -79.5 -79.0 ... 89.5 90.0
        array([-80. , -79.5, -79. , ...,  89. ,  89.5,  90. ], dtype=float32)
      • lon
        (lon)
        float32
        0.0 0.5 1.0 ... 358.5 359.0 359.5
        array([  0. ,   0.5,   1. , ..., 358.5, 359. , 359.5], dtype=float32)
      • time
        (time)
        datetime64[ns]
        2020-01-16T12:00:00 ... 2022-12-16T12:00:00
        array(['2020-01-16T12:00:00.000000000', '2020-02-15T12:00:00.000000000',
               '2020-03-16T12:00:00.000000000', '2020-04-16T00:00:00.000000000',
               '2020-05-16T12:00:00.000000000', '2020-06-16T00:00:00.000000000',
               '2020-07-16T12:00:00.000000000', '2020-08-16T12:00:00.000000000',
               '2020-09-16T00:00:00.000000000', '2020-10-16T12:00:00.000000000',
               '2020-11-16T00:00:00.000000000', '2020-12-16T12:00:00.000000000',
               '2021-01-16T12:00:00.000000000', '2021-02-15T00:00:00.000000000',
               '2021-03-16T12:00:00.000000000', '2021-04-16T00:00:00.000000000',
               '2021-05-16T12:00:00.000000000', '2021-06-16T00:00:00.000000000',
               '2021-07-16T12:00:00.000000000', '2021-08-16T12:00:00.000000000',
               '2021-09-16T00:00:00.000000000', '2021-10-16T12:00:00.000000000',
               '2021-11-16T00:00:00.000000000', '2021-12-16T12:00:00.000000000',
               '2022-01-16T12:00:00.000000000', '2022-02-15T00:00:00.000000000',
               '2022-03-16T12:00:00.000000000', '2022-04-16T00:00:00.000000000',
               '2022-05-16T12:00:00.000000000', '2022-06-16T00:00:00.000000000',
               '2022-07-16T12:00:00.000000000', '2022-08-16T12:00:00.000000000',
               '2022-09-16T00:00:00.000000000', '2022-10-16T12:00:00.000000000',
               '2022-11-16T00:00:00.000000000', '2022-12-16T12:00:00.000000000'],
              dtype='datetime64[ns]')
      • mixed_layer_depth
        (time, lat, lon)
        float32
        nan nan nan ... 32.09516 32.13625
        units :
        m
        array([[[      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                [      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                [      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                ...,
                [26.433268, 26.491982, 26.526407, ..., 26.178778, 26.231415,
                 26.326649],
                [26.24937 , 26.272125, 26.274122, ..., 26.061453, 26.100391,
                 26.175747],
                [26.795238, 26.79645 , 26.766792, ..., 26.669004, 26.694345,
                 26.747482]],
        
               [[      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                [      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                [      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                ...,
                [34.60562 , 34.412   , 34.28873 , ..., 34.29361 , 34.493492,
                 34.650978],
                [34.714344, 34.44238 , 34.268047, ..., 34.41533 , 34.659706,
                 34.818394],
                [34.793133, 34.458344, 34.240494, ..., 34.520046, 34.790157,
                 34.943398]],
        
               [[      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                [      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                [      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                ...,
                [34.760757, 34.47968 , 34.358986, ..., 34.71889 , 34.976482,
                 35.012016],
                [34.573467, 34.18083 , 33.993065, ..., 34.628742, 34.932842,
                 34.936428],
                [34.3476  , 33.86288 , 33.633575, ..., 34.549816, 34.857048,
                 34.81494 ]],
        
               ...,
        
               [[      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                [      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                [      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                ...,
                [35.410255, 35.330067, 35.162006, ..., 35.705242, 35.55527 ,
                 35.42727 ],
                [35.491386, 35.424343, 35.27083 , ..., 35.761486, 35.590282,
                 35.4821  ],
                [35.49925 , 35.450314, 35.313107, ..., 35.71577 , 35.53057 ,
                 35.45492 ]],
        
               [[      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                [      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                [      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                ...,
                [38.62612 , 38.588093, 38.504982, ..., 38.829372, 38.648785,
                 38.586113],
                [38.737125, 38.697975, 38.58274 , ..., 38.957764, 38.728115,
                 38.665955],
                [38.960854, 38.92994 , 38.800632, ..., 39.167187, 38.916836,
                 38.86398 ]],
        
               [[      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                [      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                [      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                ...,
                [31.768108, 31.852535, 31.842632, ..., 31.53782 , 31.470924,
                 31.566288],
                [31.702795, 31.735882, 31.673613, ..., 31.55104 , 31.45795 ,
                 31.533722],
                [32.268738, 32.270744, 32.17937 , ..., 32.219543, 32.09516 ,
                 32.13625 ]]], dtype=float32)
    In [13]:
    # Interpolate the climatological MLD in time onto the slab-model time
    # axis, then pick the nearest grid points of the slab-model lat/lon grid.
    mld_on_slab_times = mld_expand.interp_like(ds_slab.coords["time"])
    mld_slab = mld_on_slab_times.sel(
        lat=ds_slab.coords["lat"],
        lon=ds_slab.coords["lon"],
        method="nearest",
    )
    
    In [14]:
    # Trigger the (lazy, Dask-backed) interpolation and load the result into memory.
    mld_slab = mld_slab.compute()
    
    In [15]:
    # Inspect the MLD field on the slab-model grid.
    display(mld_slab)
    
    Show/Hide data repr Show/Hide attributes
    xarray.Dataset
      • lat: 141
      • lon: 720
      • time: 248
      • lat
        (lat)
        float32
        35.0 34.5 34.0 ... -34.5 -35.0
        array([ 35. ,  34.5,  34. ,  33.5,  33. ,  32.5,  32. ,  31.5,  31. ,  30.5,
                30. ,  29.5,  29. ,  28.5,  28. ,  27.5,  27. ,  26.5,  26. ,  25.5,
                25. ,  24.5,  24. ,  23.5,  23. ,  22.5,  22. ,  21.5,  21. ,  20.5,
                20. ,  19.5,  19. ,  18.5,  18. ,  17.5,  17. ,  16.5,  16. ,  15.5,
                15. ,  14.5,  14. ,  13.5,  13. ,  12.5,  12. ,  11.5,  11. ,  10.5,
                10. ,   9.5,   9. ,   8.5,   8. ,   7.5,   7. ,   6.5,   6. ,   5.5,
                 5. ,   4.5,   4. ,   3.5,   3. ,   2.5,   2. ,   1.5,   1. ,   0.5,
                 0. ,  -0.5,  -1. ,  -1.5,  -2. ,  -2.5,  -3. ,  -3.5,  -4. ,  -4.5,
                -5. ,  -5.5,  -6. ,  -6.5,  -7. ,  -7.5,  -8. ,  -8.5,  -9. ,  -9.5,
               -10. , -10.5, -11. , -11.5, -12. , -12.5, -13. , -13.5, -14. , -14.5,
               -15. , -15.5, -16. , -16.5, -17. , -17.5, -18. , -18.5, -19. , -19.5,
               -20. , -20.5, -21. , -21.5, -22. , -22.5, -23. , -23.5, -24. , -24.5,
               -25. , -25.5, -26. , -26.5, -27. , -27.5, -28. , -28.5, -29. , -29.5,
               -30. , -30.5, -31. , -31.5, -32. , -32.5, -33. , -33.5, -34. , -34.5,
               -35. ], dtype=float32)
      • lon
        (lon)
        float32
        0.0 0.5 1.0 ... 358.5 359.0 359.5
        array([  0. ,   0.5,   1. , ..., 358.5, 359. , 359.5], dtype=float32)
      • time
        (time)
        datetime64[ns]
        2021-02-08 ... 2021-03-25T17:00:00
        array(['2021-02-08T00:00:00.000000000', '2021-02-08T06:00:00.000000000',
               '2021-02-08T12:00:00.000000000', ..., '2021-03-25T12:00:00.000000000',
               '2021-03-25T15:00:00.000000000', '2021-03-25T17:00:00.000000000'],
              dtype='datetime64[ns]')
      • mixed_layer_depth
        (time, lat, lon)
        float64
        nan nan nan ... 38.85 39.43 40.01
        units :
        m
        array([[[        nan,         nan,         nan, ...,         nan,
                         nan,         nan],
                [        nan,         nan,         nan, ...,         nan,
                         nan,         nan],
                [        nan,         nan,         nan, ...,         nan,
                         nan,         nan],
                ...,
                [24.79411885, 25.79325146, 26.80602879, ..., 22.77751509,
                 22.94887223, 23.71864642],
                [25.95615917, 26.93199264, 27.90076851, ..., 24.11440562,
                 24.22995231, 24.94106409],
                [27.70444298, 28.49568053, 29.24057789, ..., 26.39417597,
                 26.461704  , 26.94540935]],
        
               [[        nan,         nan,         nan, ...,         nan,
                         nan,         nan],
                [        nan,         nan,         nan, ...,         nan,
                         nan,         nan],
                [        nan,         nan,         nan, ...,         nan,
                         nan,         nan],
                ...,
                [24.81984101, 25.8305102 , 26.85415561, ..., 22.77896986,
                 22.95691243, 23.73442267],
                [25.98168653, 26.96680724, 27.94425271, ..., 24.1218999 ,
                 24.24131819, 24.95832542],
                [27.73388863, 28.53106993, 29.28159746, ..., 26.41449715,
                 26.48299873, 26.9697017 ]],
        
               [[        nan,         nan,         nan, ...,         nan,
                         nan,         nan],
                [        nan,         nan,         nan, ...,         nan,
                         nan,         nan],
                [        nan,         nan,         nan, ...,         nan,
                         nan,         nan],
                ...,
                [24.84556318, 25.86776895, 26.90228242, ..., 22.78042464,
                 22.96495263, 23.75019891],
                [26.00721388, 27.00162183, 27.98773691, ..., 24.12939418,
                 24.25268406, 24.97558675],
                [27.76333427, 28.56645933, 29.32261703, ..., 26.43481833,
                 26.50429347, 26.99399405]],
        
               ...,
        
               [[        nan,         nan,         nan, ...,         nan,
                         nan,         nan],
                [        nan,         nan,         nan, ...,         nan,
                         nan,         nan],
                [        nan,         nan,         nan, ...,         nan,
                         nan,         nan],
                ...,
                [39.37059871, 40.34404542, 41.2266902 , ..., 36.58465676,
                 37.52590661, 38.41951502],
                [39.58178273, 40.40296186, 41.03658476, ..., 37.00328577,
                 37.81699672, 38.67631068],
                [40.6038183 , 41.18753039, 41.55909097, ..., 38.80617892,
                 39.3766128 , 39.9631641 ]],
        
               [[        nan,         nan,         nan, ...,         nan,
                         nan,         nan],
                [        nan,         nan,         nan, ...,         nan,
                         nan,         nan],
                [        nan,         nan,         nan, ...,         nan,
                         nan,         nan],
                ...,
                [39.41962416, 40.39244775, 41.27526759, ..., 36.6226802 ,
                 37.57012988, 38.46762679],
                [39.62169542, 40.442599  , 41.07686423, ..., 37.03571818,
                 37.85402443, 38.71585324],
                [40.635339  , 41.22034961, 41.5937321 , ..., 38.83428603,
                 39.40674164, 39.99408289]],
        
               [[        nan,         nan,         nan, ...,         nan,
                         nan,         nan],
                [        nan,         nan,         nan, ...,         nan,
                         nan,         nan],
                [        nan,         nan,         nan, ...,         nan,
                         nan,         nan],
                ...,
                [39.45230778, 40.42471597, 41.30765251, ..., 36.64802915,
                 37.59961206, 38.49970131],
                [39.64830389, 40.46902376, 41.1037172 , ..., 37.05733979,
                 37.87870958, 38.74221494],
                [40.6563528 , 41.24222909, 41.61682618, ..., 38.85302411,
                 39.42682753, 40.01469542]]])

    Scale near-inertial velocities with MLD

    In [16]:
    # Rescale the slab-model currents from the constant model-layer depth H
    # (dataset attribute) to the local climatological mixed-layer depth.
    # Scaling the speed and both components by the same factor keeps them
    # mutually consistent.
    for _var in ("u_slab", "v_slab", "umag_slab"):
        ds_slab[_var] *= ds_slab.attrs["slab_model_H"] / mld_slab.mixed_layer_depth
    

    Max near-inertial speed over good and whole forecast period

    We'll plot the time-maximum of near-inertial speed for the good forecast period and for the whole forecast period.

    First, we construct the plot.

    In [17]:
    # Time-maxima of near-inertial speed over the good and the whole
    # forecast period.
    slab_umag_good_forecast_max = ds_slab["umag_slab"].sel(
        time=slice(start_of_forecast, start_of_forecast + good_forecast_time)
    ).max("time")
    slab_umag_whole_forecast_max = ds_slab["umag_slab"].sel(
        time=slice(start_of_forecast, None)
    ).max("time")


    def _speed_max_map(data, period_label):
        """Map of time-max near-inertial speed with the shared styling.

        Factored out so the two panels cannot drift apart (the kwargs were
        previously duplicated verbatim).
        """
        return data.hvplot(
            x="lon", y="lat", z="umag_slab",
            clim=(0, 1.0),
            cmap=cmocean.cm.speed,
            frame_width=800,
            hover=False,
            geo=True, coastline=True,
            crs=ccrs.PlateCarree(), projection=ccrs.PlateCarree(),
            title=(
                "Near-inertial speed max [m/s] and mixed-layer-depth [m], "
                f"{period_label} forecast period"
            ),
        )


    near_inertial_max_plots = (
        (
            _speed_max_map(slab_umag_good_forecast_max, "good")
            + _speed_max_map(slab_umag_whole_forecast_max, "whole")
        ) * mld_slab.mean("time").mixed_layer_depth.hvplot.contour(
            # MLD contours: every 10 m up to 40 m, then every 20 m up to 100 m
            x="lon", y="lat", geo=True, cmap="gray", hover=True,
            levels=list(range(0, 50, 10)) + list(range(60, 120, 20)), line_width=1.5, alpha=0.5
        ) * buoy_positions.hvplot.points(
            # buoy positions as hollow black diamonds
            y="lat", x="lon", geo=True, coastline=True,
            marker='diamond',
            fill_color=None, line_color="black",
            line_width=2, size=70,
        ) * gv.feature.grid()
    ).cols(1)
    
    /srv/conda/envs/notebook/lib/python3.7/site-packages/xarray/core/nanops.py:142: RuntimeWarning: Mean of empty slice
      return np.nanmean(a, axis=axis, dtype=dtype)
    

     

    In [18]:
    # Render the combined speed-max / MLD-contour / buoy-marker maps.
    display(near_inertial_max_plots)
    
    /srv/conda/envs/notebook/lib/python3.7/site-packages/cartopy/io/__init__.py:260: DownloadWarning: Downloading: https://naciscdn.org/naturalearth/110m/physical/ne_110m_graticules_30.zip
      warnings.warn('Downloading: {}'.format(url), DownloadWarning)
    WARNING:param.GeoOverlayPlot03686: title_format is deprecated. Please use title instead
    WARNING:param.GeoOverlayPlot03686:title_format is deprecated. Please use title instead
    WARNING:param.GeoOverlayPlot03686: title_format is deprecated. Please use title instead
    WARNING:param.GeoOverlayPlot03686:title_format is deprecated. Please use title instead
    WARNING:param.GeoOverlayPlot04081: title_format is deprecated. Please use title instead
    WARNING:param.GeoOverlayPlot04081:title_format is deprecated. Please use title instead
    WARNING:param.GeoOverlayPlot04081: title_format is deprecated. Please use title instead
    WARNING:param.GeoOverlayPlot04081:title_format is deprecated. Please use title instead
    

    Near-inertial current timeseries for buoy locations

    In [19]:
    # Build one figure pair per buoy: near-inertial currents (top panel) and
    # 20-m winds (bottom panel), with the forecast period shaded.
    time_series_plots = []
    time_formatter = DatetimeTickFormatter(
        months='%b %Y', days='%b %d'
    )

    # Shaded background spans: light gray for the good forecast period,
    # pink for the remainder of the forecast.
    forecast_spans = (
        hv.VSpan(
            start_of_forecast, start_of_forecast + good_forecast_time
        ).opts(padding=0, color='lightgray')
        * hv.VSpan(
            start_of_forecast + good_forecast_time, None
        ).opts(padding=0, color='pink')
    )

    for lat, lon in zip(buoy_positions["lat"], buoy_positions["lon"]):
        # mean climatological MLD at the nearest grid point (used in the title)
        local_mld = mld_slab.mixed_layer_depth.sel(lat=lat, lon=lon, method='nearest').mean('time').data
        name = f"{lat}N {lon}E, MLD={local_mld:.0f}m"
        buoy_ds = ds_slab.sel(lat=lat, lon=lon, method="nearest")
        buoy_ds["U20"] = ds_GFS["U20"].sel(lat=lat, lon=lon, method="nearest")
        buoy_ds["V20"] = ds_GFS["V20"].sel(lat=lat, lon=lon, method="nearest")

        # skip positions without any valid slab-model data (e.g. over land)
        if (buoy_ds["umag_slab"].max("time").isnull().data.compute()):
            continue
        time_series_plots.append(
            (
                (
                    forecast_spans.redim.label(y="u_slab")
                    * buoy_ds["u_slab"].hvplot.line(label="zonal near-inertial current")
                    * buoy_ds["v_slab"].hvplot.line(label="meridional near-inertial current")
                    * buoy_ds["umag_slab"].hvplot.line(label="near-inertial speed")
                ).options(
                    width=800, height=160, show_grid=True,
                    xaxis=None,
                    legend_cols=False, legend_position='right',
                    ylabel="current [m/s]", title=name
                )
                + (
                    forecast_spans.redim.label(y="U20")
                    * buoy_ds["U20"].hvplot.line(label="zonal wind (20m)")
                    * buoy_ds["V20"].hvplot.line(label="meridional wind (20m)")
                ).options(
                    width=800, height=160, show_grid=True,
                    xformatter=time_formatter,
                    legend_cols=False, legend_position='right',
                    ylabel="wind [m/s]", xlabel=""
                )
            )
        )

    # stack all buoy panels into a single column layout
    time_series_plots = reduce(add, time_series_plots).cols(1)
    

     

    In [20]:
    # Render the per-buoy time-series panels.
    display(time_series_plots)
    

    Atmospheric conditions over forecast period

    To get a feeling for the atmospheric conditions, we'll plot sea-level pressure anomalies every 12 hours for 3 days before and throughout the whole forecast period.

    Anomalies are calculated relative to the whole data period (usually 30+14 days).

    In [21]:
    # Sea-level-pressure anomaly relative to the time mean over the whole
    # data period (hindcast + forecast), loaded into memory.
    SLP = ds_GFS["SLP"].compute()
    SLP_mean = SLP.mean("time")
    SLP_anomaly = (SLP - SLP_mean)
    
    /srv/conda/envs/notebook/lib/python3.7/site-packages/xarray/core/nanops.py:142: RuntimeWarning: Mean of empty slice
      return np.nanmean(a, axis=axis, dtype=dtype)
    
    In [22]:
    # Plot SLP anomaly maps every 12 hours, starting 3 days (6 steps) before
    # the forecast and continuing to the end of the data.
    plot_every = np.timedelta64(12, "h")
    # Number of 12-hourly steps from the forecast start to the last time
    # stamp.  int() floors the (non-negative) float ratio; +1 includes the
    # final step.  (Replaces the obscure `.item() // 1 + 1` float-floor idiom
    # that then needed another int() conversion at the loop bound.)
    n_steps = int(
        ((SLP_anomaly.coords["time"].max("time") - start_of_forecast) / plot_every).item()
    ) + 1

    plot_times = [
        (start_of_forecast + n * plot_every)
        for n in range(-6, n_steps)
    ]

    plots = []

    for plot_time in plot_times:
        title = f"SLP anomaly [hPa], {pd.Timestamp(plot_time).strftime('%Y-%m-%d %H:%M:%S UTC')}"
        if plot_time > start_of_forecast:
            # annotate the forecast lead time in hours
            title += f"\t(forecast + {(plot_time - start_of_forecast) / np.timedelta64(1, 'h')}h)"
        try:
            plots.append(
                (
                    SLP_anomaly.sel(time=plot_time, method="nearest").compute().hvplot(
                        clim=(-10, 10),
                        cmap=cmocean.cm.delta,
                        frame_width=800,
                        geo=True, coastline=True,
                        crs=ccrs.PlateCarree(), projection=ccrs.PlateCarree(),
                        hover=False
                    )
                    * buoy_positions.hvplot.points(
                        y="lat", x="lon", geo=True, coastline=True,
                        marker='circle',
                        fill_color=None, line_color="black",
                        line_width=1.5, size=60,
                    )
                    * gv.feature.grid()
                ).opts(
                    title=title,
                    show_grid=True
                )
            )
        except Exception as e:
            # best-effort: report and skip time steps that cannot be plotted
            print(f"for {plot_time} I got: {e}")

    slp_plot = reduce(add, plots).cols(1)
    

     

    In [23]:
    # Render the stacked SLP-anomaly maps.
    display(slp_plot)
    

    In [24]:
    # Record the wall-clock finish time (ISO format with nanoseconds, UTC).
    !echo "Finished: $(date -Ins) (UTC)"
    
    Finished: 2021-03-10T04:52:22,414380002+00:00 (UTC)